Load dataset — simple contrast (dummy coding), stim: Pain

% Load first-level contrast images (con_0024) for all subjects of
% model-02_CcEScA. (The original had a duplicated mount_dir assignment.)
mount_dir = '/Volumes/spacetop_projects_social/analysis/fmri/spm/univariate/model-02_CcEScA/1stLevel';
con_list = dir(fullfile(mount_dir, '*/con_0024.nii'));
spm('Defaults','fMRI')
con_fldr = {con_list.folder}; fname = {con_list.name};
% fullfile joins folder and name with the platform file separator,
% replacing the manual strcat(...,'/',...) concatenation.
con_files = fullfile(con_fldr, fname)';
% CanLab fmri_data object; resamples all images to a common mask/space.
con_data_obj = fmri_data(con_files);
Using default mask: /Users/h/Documents/MATLAB/CanlabCore/CanlabCore/canlab_canonical_brains/Canonical_brains_surfaces/brainmask_canlab.nii
sampleto = '/Volumes/spacetop_projects_social/analysis/fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0003/con_0024.nii'
loading mask. mapping volumes. checking that dimensions and voxel sizes of volumes are the same. Pre-allocating data array. Needed: 24360228 bytes Loading image number: 61 Elapsed time is 115.872916 seconds. Image names entered, but fullpath attribute is empty. Getting path info. Number of unique values in dataset: 5975338 Bit rate: 22.51 bits
% Contrast labels, in the order of the SPM con_* indices for this model.
% NOTE(review): suffixes _P/_V/_C/_G presumably denote task conditions
% (e.g. pain/vicarious/cognitive/general) and 'X' an interaction term;
% 'simple_' marks the dummy-coded (simple) contrasts -- confirm against
% the first-level model specification.
contrast_name = {'cue_P', 'cue_V', 'cue_C', 'cue_G',...
'cueXcue_P', 'cueXcue_V', 'cueXcue_C', 'cueXcue_G',...
'stim_P', 'stim_V', 'stim_C', 'stim_G',...
'stimXcue_P', 'stimXcue_V', 'stimXcue_C', 'stimXcue_G',...
'motor', ...
'simple_cue_P', 'simple_cue_V', 'simple_cue_C',...
'simple_cueXcue_P', 'simple_cueXcue_V', 'simple_cueXcue_C', ...
'simple_stim_P', 'simple_stim_V', 'simple_stim_C',...
'simple_stimXcue_P', 'simple_stimXcue_V', 'simple_stimXcue_C'};

Check data coverage

% Per-voxel data coverage: count, at every voxel, how many images carry a
% valid (non-NaN and nonzero) value, and display the resulting count map.
m = mean(con_data_obj);
valid_vox = (con_data_obj.dat ~= 0) & ~isnan(con_data_obj.dat);
m.dat = sum(valid_vox, 2);
orthviews(m, 'trans') % display coverage map
SPM12: spm_check_registration (v7759) 23:43:23 - 25/05/2022 ======================================================================== Display /Users/h/Documents/MATLAB/CanlabCore/CanlabCore/canlab_canonical_brains/Canonical_brains_surfaces/keuken_2014_enhanced_for_underlay.img,1
Grouping contiguous voxels: 1 regions
ans = 1×1 cell array
{1×1 region}

Plot diagnostics, before L2-norm rescaling

drawnow; snapnow
% plot() on an fmri_data object runs outlier diagnostics and returns
% logical masks of outlier images (uncorrected / Bonferroni-corrected
% Mahalanobis distance, among other criteria). Semicolon added so the
% 61x1 logical arrays are not dumped to the console.
[wh_outlier_uncorr, wh_outlier_corr] = plot(con_data_obj);
______________________________________________________________ Outlier analysis ______________________________________________________________ global mean | global mean to var | spatial MAD | Missing values | 0 images Retained 10 components for mahalanobis distance Expected 50% of points within 50% normal ellipsoid, found 27.87% Expected 3.05 outside 95% ellipsoid, found 14 Potential outliers based on mahalanobis distance: Bonferroni corrected: 7 images Cases 10 26 28 30 43 46 57 Uncorrected: 14 images Cases 7 10 18 26 28 30 35 37 42 43 46 47 48 57 Retained 17 components for mahalanobis distance Expected 50% of points within 50% normal ellipsoid, found 50.82% Expected 3.05 outside 95% ellipsoid, found 0 Potential outliers based on mahalanobis distance: Bonferroni corrected: 0 images Cases Uncorrected: 0 images Cases Mahalanobis (cov and corr, q<0.05 corrected): 7 images Outlier_count Percentage _____________ __________ global_mean 2 3.2787 global_mean_to_variance 2 3.2787 missing_values 0 0 rmssd_dvars 0 0 spatial_variability 2 3.2787 mahal_cov_uncor 14 22.951 mahal_cov_corrected 7 11.475 mahal_corr_uncor 0 0 mahal_corr_corrected 0 0 Overall_uncorrected 14 22.951 Overall_corrected 7 11.475
SPM12: spm_check_registration (v7759) 23:43:57 - 25/05/2022 ======================================================================== Display /Users/h/Documents/MATLAB/CanlabCore/CanlabCore/canlab_canonical_brains/Canonical_brains_surfaces/keuken_2014_enhanced_for_underlay.img,1 (all) /Users/h/Documents/MATLAB/CanlabCore/CanlabCore/canlab_canonical_brains/Canonical_brains_surfaces/keuken_2014_enhanced_for_underlay.img,1 /Users/h/Documents/MATLAB/CanlabCore/CanlabCore/canlab_canonical_brains/Canonical_brains_surfaces/keuken_2014_enhanced_for_underlay.img,1
Grouping contiguous voxels: 1 regions
Grouping contiguous voxels: 1 regions
Grouping contiguous voxels: 1 regions
wh_outlier_uncorr = 61×1 logical array
0 0 0 0 0 0 1 0 0 1
wh_outlier_corr = 61×1 logical array
0 0 0 0 0 0 0 0 0 1

% Make the current diagnostic figure visible in the published output.
set(gcf,'Visible','on')
% NOTE(review): figure('Visible','on') opens a NEW, empty figure rather
% than re-showing the diagnostic plot -- confirm this is intentional.
figure ('Visible', 'on');
drawnow, snapnow;

Remove outliers flagged by the diagnostic plot

% Work on a copy so con_data_obj keeps the full image set.
con = con_data_obj;
n_imgs = size(con_data_obj.dat, 2);
disp("current length is " + num2str(n_imgs))
current length is 61
% The per-subject loop was disabled, but its disp() was left active and
% referenced the loop index 's' -- a stale workspace variable (it printed
% "subject61"). Comment out the disp as well so nothing depends on an
% undefined variable.
%for s = 1:length(wh_outlier_corr)
%    disp(strcat("-------subject", num2str(s), "------"))
-------subject61------
% Drop the columns (images) flagged as corrected outliers.
con.dat = con_data_obj.dat(:,~wh_outlier_corr);
% Keep image metadata in sync with the retained columns; semicolon added
% so the whole fmri_data object is not echoed to the console.
con.image_names = con_data_obj.image_names(~wh_outlier_corr,:);
con =
fmri_data with properties: source_notes: 'Info about image source here' X: [] mask: [1×1 fmri_mask_image] mask_descrip: 'REMOVED: CHANGED SPACE' images_per_session: [] Y: [] Y_names: [] Y_descrip: 'Behavioral or outcome data matrix.' covariates: [] covariate_names: {''} covariates_descrip: 'Nuisance covariates associated with data' history_descrip: 'Cell array of names of methods applied to this data, in order' additional_info: [0×0 struct] metadata_table: [0×0 table] dat: [99837×54 single] dat_descrip: [] volInfo: [1×1 struct] removed_voxels: 0 removed_images: 0 image_names: [54×12 char] fullpath: [61×109 char] files_exist: [61×1 logical] history: {'' 'Sampled to space of /Volumes/spacetop_projects_social/analysis/fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0003/con_0024.nii' 'Masked with REMOVED: CHANGED SPACE' 'Checked image files exist. All exist = 1'}
% Keep fullpath rows consistent with the retained images; semicolon added
% to suppress echoing the object.
con.fullpath = con_data_obj.fullpath(~wh_outlier_corr,:);
con =
fmri_data with properties: source_notes: 'Info about image source here' X: [] mask: [1×1 fmri_mask_image] mask_descrip: 'REMOVED: CHANGED SPACE' images_per_session: [] Y: [] Y_names: [] Y_descrip: 'Behavioral or outcome data matrix.' covariates: [] covariate_names: {''} covariates_descrip: 'Nuisance covariates associated with data' history_descrip: 'Cell array of names of methods applied to this data, in order' additional_info: [0×0 struct] metadata_table: [0×0 table] dat: [99837×54 single] dat_descrip: [] volInfo: [1×1 struct] removed_voxels: 0 removed_images: 0 image_names: [54×12 char] fullpath: [54×109 char] files_exist: [61×1 logical] history: {'' 'Sampled to space of /Volumes/spacetop_projects_social/analysis/fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0003/con_0024.nii' 'Masked with REMOVED: CHANGED SPACE' 'Checked image files exist. All exist = 1'}
% Keep files_exist flags consistent with the retained images; semicolon
% added to suppress echoing the object.
con.files_exist = con_data_obj.files_exist(~wh_outlier_corr,:);
con =
fmri_data with properties: source_notes: 'Info about image source here' X: [] mask: [1×1 fmri_mask_image] mask_descrip: 'REMOVED: CHANGED SPACE' images_per_session: [] Y: [] Y_names: [] Y_descrip: 'Behavioral or outcome data matrix.' covariates: [] covariate_names: {''} covariates_descrip: 'Nuisance covariates associated with data' history_descrip: 'Cell array of names of methods applied to this data, in order' additional_info: [0×0 struct] metadata_table: [0×0 table] dat: [99837×54 single] dat_descrip: [] volInfo: [1×1 struct] removed_voxels: 0 removed_images: 0 image_names: [54×12 char] fullpath: [54×109 char] files_exist: [54×1 logical] history: {'' 'Sampled to space of /Volumes/spacetop_projects_social/analysis/fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0003/con_0024.nii' 'Masked with REMOVED: CHANGED SPACE' 'Checked image files exist. All exist = 1'}
%end
% Report how many images were dropped and the resulting sample size.
disp(strcat("after removing ", num2str(sum(wh_outlier_corr)), " participants, size is now ",num2str(size(con.dat,2))))
after removing 7 participants, size is now 54

Plot diagnostics, after L2-norm rescaling

% Normalize each image to unit L2 norm before the group analysis
% (function-call syntax is equivalent to the dot-call for classdef methods).
imgs2 = rescale(con, 'l2norm_images');
l2norm
% Voxel-wise one-sample t-test across the retained images.
t = ttest(imgs2);
One-sample t-test Calculating t-statistics and p-values
% Display the unthresholded t-map on orthogonal slices.
orthviews(t)
SPM12: spm_check_registration (v7759) 23:44:02 - 25/05/2022 ======================================================================== Display /Users/h/Documents/MATLAB/CanlabCore/CanlabCore/canlab_canonical_brains/Canonical_brains_surfaces/keuken_2014_enhanced_for_underlay.img,1
ans = 1×1 cell array
{1×1 struct}
drawnow, snapnow;
% Threshold the t-map at FDR q < .05.
fdr_t = threshold(t, .05, 'fdr');
Image 1 FDR q < 0.050 threshold is 0.006606 Image 1 104 contig. clusters, sizes 1 to 10129 Positive effect: 13144 voxels, min p-value: 0.00000000 Negative effect: 47 voxels, min p-value: 0.00025868
% Display the FDR-thresholded map.
orthviews(fdr_t)
SPM12: spm_check_registration (v7759) 23:44:03 - 25/05/2022 ======================================================================== Display /Users/h/Documents/MATLAB/CanlabCore/CanlabCore/canlab_canonical_brains/Canonical_brains_surfaces/keuken_2014_enhanced_for_underlay.img,1
ans = 1×1 cell array
{1×104 struct}
drawnow, snapnow;
% Render the thresholded map as a multi-slice montage figure.
create_figure('montage'); axis off
montage(fdr_t)
Setting up fmridisplay objects
sagittal montage: 244 voxels displayed, 12947 not displayed on these slices
sagittal montage: 218 voxels displayed, 12973 not displayed on these slices
sagittal montage: 179 voxels displayed, 13012 not displayed on these slices
axial montage: 2451 voxels displayed, 10740 not displayed on these slices
axial montage: 2690 voxels displayed, 10501 not displayed on these slices
ans =
fmridisplay with properties: overlay: '/Users/h/Documents/MATLAB/CanlabCore/CanlabCore/canlab_canonical_brains/Canonical_brains_surfaces/keuken_2014_enhanced_for_underlay.img' SPACE: [1×1 struct] activation_maps: {[1×1 struct]} montage: {[1×1 struct] [1×1 struct] [1×1 struct] [1×1 struct] [1×1 struct]} surface: {} orthviews: {} history: {} history_descrip: [] additional_info: ''
drawnow, snapnow;
% Parcel-wise robust regression (intercept-only: group average per parcel),
% including its own input-image diagnostics.
out = robfit_parcelwise(imgs2); drawnow; snapnow;
Loading atlas: CANlab_combined_atlas_object_2018_2mm.mat Initializing nodes to match regions. Updating node response data. Updating obj.connectivity.nodes. Updating obj.connectivity.nodes. Updating region averages. Updating obj.connectivity.regions. Updating obj.connectivity.regions. __________________________________________________________________ Input image diagnostic information __________________________________________________________________ Retained 9 components for mahalanobis distance Expected 50% of points within 50% normal ellipsoid, found 53.70% Expected 2.70 outside 95% ellipsoid, found 2 Potential outliers based on mahalanobis distance: Bonferroni corrected: 0 images Cases Uncorrected: 2 images Cases 10 32 Retained 3 components for mahalanobis distance Expected 50% of points within 50% normal ellipsoid, found 61.11% Expected 2.70 outside 95% ellipsoid, found 1 Potential outliers based on mahalanobis distance: Bonferroni corrected: 0 images Cases Uncorrected: 1 images Cases 19 Extracting from gray_matter_mask_sparse.img. Extracting from canonical_white_matter.img. Extracting from canonical_ventricles.img. mean_gray_matter_coverage: 1 global_d_ventricles: 0.6485 global_logp_ventricles: 3.7059 global_d_wm: 0.5382 global_logp_wm: 2.7984 gm_explained_by_csf_pvalue: 6.1238e-05 r2_explained_by_csf: 0.2679 gm_l2norm_explained_by_csf_pvalue: 1.0075e-09 r2_l2norm_explained_by_csf: 0.5152 csf_to_gm_signal_ratio: 1.4669 gm_scale_inhom: 0.1224 csf_scale_inhom: 0.2231 warnings: {1×5 cell} Warning: Significant global activation in CSF space/ventricles. - Effect size is d = 0.65 Warning: Significant global activation in white matter. - Effect size is d = 0.54 Warning: Gray-matter individual diffs significantly correlated with mean CSF value. - Var explained (r^2) = 26.79% Warning: Gray-matter scale (L2 norm) significantly correlated with mean CSF L2 norm. - Var explained (r^2) = 51.52% Warning: Strong non-zero signal in CSF relative to gray matter. 
- Ratio is = 1.47 Number of unique values in dataset: 1 Bit rate: 0.00 bits Warning: Number of unique values in dataset is low, indicating possible restriction of bit rate. For comparison, Int16 has 65,536 unique values Number of unique values in dataset: 1 Bit rate: 0.00 bits Warning: Number of unique values in dataset is low, indicating possible restriction of bit rate. For comparison, Int16 has 65,536 unique values Number of unique values in dataset: 1 Bit rate: 0.00 bits Warning: Number of unique values in dataset is low, indicating possible restriction of bit rate. For comparison, Int16 has 65,536 unique values __________________________________________________________________ Parcel-wise robust regression __________________________________________________________________ maxT minP sig05 sig005 sig001 sigFDR05 p_thr_FDR05 min_d_FDR05 ____ __________ _____ ______ ______ ________ ___________ ___________ Intercept (Group avg) 6.5 2.8729e-08 241 101 40 461 0.57596 -0.026444 sig*: Significant parcels at given threshold (p < 0.05 two-tailed, q < 0.05 FDR, etc.) p_thr_FDR05: P-value threshold to achieve q < 0.05 FDR-corrected for each predictor min_d_FDR05: Min Cohen's d detectable at FDR q < 0.05dashes __________________________________________________________________ Tables of regions at q < 0.05 FDR __________________________________________________________________ __________________________________________________________________ Predictor 1: Intercept (Group avg) __________________________________________________________________ Grouping contiguous voxels: 1 regions
48
____________________________________________________________________________________________________________________________________________ Positive Effects
sampleto = '/Users/h/Documents/MATLAB/CanlabCore/CanlabCore/canlab_canonical_brains/Canonical_brains_surfaces/brainmask.nii'
Region Volume XYZ maxZ modal_label_descriptions Perc_covered_by_label Atlas_regions_covered region_index ____________________ __________ ________________ ____ ________________________ _____________________ _____________________ ____________ {'Multiple regions'} 1.2947e+06 -2 -20 6 6.5 {'Cerebellum'} 2 455 1 Negative Effects Region Volume XYZ maxZ modal_label_descriptions Perc_covered_by_label Atlas_regions_covered region_index ____________________ ______ ________________ _______ ________________________ _____________________ _____________________ ____________ {'Multiple regions'} 13152 -4 30 -18 -2.7391 {'Cortex_Limbic'} 35 6 2
______________________________________________________________ Outlier analysis ______________________________________________________________ global mean | global mean to var | spatial MAD | Missing values | 0 images Retained 3 components for mahalanobis distance Expected 50% of points within 50% normal ellipsoid, found 61.11% Expected 2.70 outside 95% ellipsoid, found 1 Potential outliers based on mahalanobis distance: Bonferroni corrected: 0 images Cases Uncorrected: 1 images Cases 19 Retained 9 components for mahalanobis distance Expected 50% of points within 50% normal ellipsoid, found 53.70% Expected 2.70 outside 95% ellipsoid, found 2 Potential outliers based on mahalanobis distance: Bonferroni corrected: 0 images Cases Uncorrected: 2 images Cases 10 32 Mahalanobis (cov and corr, q<0.05 corrected): 0 images Outlier_count Percentage _____________ __________ global_mean 0 0 global_mean_to_variance 0 0 missing_values 0 0 rmssd_dvars 0 0 spatial_variability 0 0 mahal_cov_uncor 1 1.8519 mahal_cov_corrected 0 0 mahal_corr_uncor 2 3.7037 mahal_corr_corrected 0 0 Overall_uncorrected 3 5.5556 Overall_corrected 0 0
SPM12: spm_check_registration (v7759) 23:45:59 - 25/05/2022 ======================================================================== Display /Users/h/Documents/MATLAB/CanlabCore/CanlabCore/canlab_canonical_brains/Canonical_brains_surfaces/keuken_2014_enhanced_for_underlay.img,1 (all) /Users/h/Documents/MATLAB/CanlabCore/CanlabCore/canlab_canonical_brains/Canonical_brains_surfaces/keuken_2014_enhanced_for_underlay.img,1 /Users/h/Documents/MATLAB/CanlabCore/CanlabCore/canlab_canonical_brains/Canonical_brains_surfaces/keuken_2014_enhanced_for_underlay.img,1
Grouping contiguous voxels: 1 regions
Grouping contiguous voxels: 1 regions
Grouping contiguous voxels: 1 regions
drawnow, snapnow; % flush graphics and capture the figure for the report

Check Neurosynth keyword associations

% Correlate each image with Neurosynth term maps; returns per-image
% feature correlations and tables of the top positive/negative terms.
[image_by_feature_correlations, top_feature_tables] = neurosynth_feature_labels( imgs2, 'images_are_replicates', false, 'noverbose');
Input image 1 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0003/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ____________________ __________ _______________ -0.07541 {'music' } 0.11713 {'visual' } -0.07131 {'musical' } 0.10299 {'object' } -0.069844 {'pitch' } 0.092735 {'shape' } -0.067969 {'autobiographical'} 0.092512 {'eye' } -0.065922 {'rating' } 0.090645 {'objects' } -0.065893 {'auditory' } 0.084853 {'orientation'} -0.063514 {'acoustic' } 0.082204 {'motion' } -0.0634 {'placebo' } 0.078388 {'hand' } -0.06107 {'sounds' } 0.075859 {'spatial' } -0.060694 {'ratings' } 0.075185 {'attention' } Input image 2 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0004/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ______________ __________ _______________ -0.092859 {'pain' } 0.14325 {'visual' } -0.086624 {'regulation'} 0.11577 {'object' } -0.083682 {'rating' } 0.10752 {'eye' } -0.082445 {'affect' } 0.10614 {'objects' } -0.08167 {'positive' } 0.10178 {'spatial' } -0.078563 {'negative' } 0.095516 {'attention' } -0.076795 {'noxious' } 0.091816 {'shape' } -0.075933 {'ratings' } 0.087744 {'position' } -0.075873 {'painful' } 0.08246 {'orientation'} -0.073666 {'depression'} 0.082174 {'motion' } Input image 3 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0005/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ______________ __________ _________________ -0.10698 {'trait' } 0.088768 {'sexual' } -0.087343 {'person' } 0.082721 {'visual' } -0.082536 {'personal' } 0.080364 {'videos' } -0.078905 {'self' } 0.075641 {'grammatical' } -0.072404 {'conflict' } 0.073409 {'physiological'} -0.071273 {'social' } 0.069297 {'virtual' } -0.071082 {'decision' } 0.067474 {'unattended' } -0.069245 {'judgments' } 0.065964 {'orthographic' 
} -0.063068 {'approach' } 0.065877 {'presentations'} -0.062257 {'depression'} 0.065087 {'movie' } Input image 4 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0007/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ________________ __________ ______________ -0.13433 {'visual' } 0.10597 {'negative' } -0.12375 {'spatial' } 0.10557 {'affect' } -0.095736 {'eye' } 0.101 {'reward' } -0.095085 {'movements' } 0.099505 {'affective' } -0.09291 {'object' } 0.099499 {'outcome' } -0.088022 {'visuospatial'} 0.097606 {'regulation'} -0.087265 {'objects' } 0.091409 {'positive' } -0.083653 {'rotation' } 0.086048 {'social' } -0.080651 {'attention' } 0.084692 {'rating' } -0.080197 {'position' } 0.082992 {'stress' } Input image 5 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0008/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _____________ __________ ______________ -0.12915 {'spatial' } 0.10635 {'affect' } -0.10868 {'motion' } 0.096404 {'regulation'} -0.1056 {'frequency'} 0.094823 {'affective' } -0.10172 {'visual' } 0.094639 {'control' } -0.096855 {'space' } 0.093025 {'rating' } -0.096003 {'object' } 0.092303 {'noxious' } -0.094886 {'objects' } 0.091044 {'negative' } -0.090748 {'locations'} 0.088667 {'ratings' } -0.084246 {'eye' } 0.086123 {'reward' } -0.081778 {'position' } 0.085047 {'rewards' } Input image 6 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0009/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ________________ __________ ________________ -0.17221 {'hand' } 0.1697 {'memory' } -0.15459 {'movements' } 0.16172 {'retrieval' } -0.14396 {'frequency' } 0.13462 {'correct' } -0.12489 {'motion' } 0.12549 {'encoding' } -0.12435 {'finger' } 0.11143 {'verbal' } -0.12285 {'stimulation' } 0.11114 {'rule' } -0.12014 
{'sensorimotor'} 0.11112 {'recollection'} -0.11698 {'motor' } 0.1099 {'episodic' } -0.11291 {'tactile' } 0.10951 {'probes' } -0.10663 {'muscle' } 0.10852 {'demand' } Input image 7 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0010/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ __________________ -0.10962 {'pair' } 0.12489 {'psychophysical'} -0.086758 {'judgment' } 0.11829 {'unpleasantness'} -0.075399 {'semantic' } 0.11513 {'somatosensory' } -0.074989 {'association'} 0.11194 {'lip' } -0.064693 {'drug' } 0.1109 {'effort' } -0.064467 {'person' } 0.10936 {'counting' } -0.063902 {'judgments' } 0.1084 {'articulatory' } -0.063646 {'age' } 0.10767 {'oral' } -0.062694 {'decision' } 0.10589 {'1back' } -0.060871 {'personal' } 0.10525 {'heat' } Input image 8 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0011/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _____________ __________ _________________ -0.052441 {'frequency'} 0.098554 {'sadness' } -0.050403 {'working' } 0.083547 {'sexual' } -0.047102 {'error' } 0.079533 {'reinforcement'} -0.046543 {'work' } 0.078748 {'incentive' } -0.045831 {'conflict' } 0.075908 {'eating' } -0.044956 {'sequence' } 0.073162 {'anticipatory' } -0.04489 {'wm' } 0.073151 {'olfactory' } -0.043288 {'executive'} 0.072628 {'drugs' } -0.041492 {'letter' } 0.071265 {'anger' } -0.039905 {'training' } 0.070273 {'happy' } Input image 9 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0013/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ____________________ __________ ________________ -0.1435 {'retrieved' } 0.13451 {'hand' } -0.13075 {'choices' } 0.12402 {'movements' } -0.13016 {'money' } 0.11582 {'motor' } -0.12914 {'autobiographical'} 0.11439 {'finger' } -0.12692 
{'selfreferential' } 0.11246 {'position' } -0.12394 {'episodic' } 0.10603 {'frequency' } -0.12246 {'retrieval' } 0.1034 {'sequence' } -0.12181 {'reward' } 0.10235 {'stimulation' } -0.11822 {'default' } 0.10138 {'sensorimotor'} -0.11499 {'positive' } 0.086424 {'muscle' } Input image 10 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0015/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ _________________ -0.39208 {'visual' } 0.3536 {'pain' } -0.32232 {'object' } 0.31274 {'somatosensory'} -0.31275 {'objects' } 0.31166 {'painful' } -0.26611 {'shape' } 0.278 {'noxious' } -0.26524 {'recognition'} 0.27448 {'motor' } -0.22961 {'priming' } 0.26055 {'stimulation' } -0.21649 {'faces' } 0.25389 {'sensation' } -0.21094 {'attention' } 0.25017 {'heat' } -0.20979 {'scene' } 0.24741 {'muscle' } -0.20763 {'shapes' } 0.21508 {'foot' } Input image 11 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0016/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ________________ __________ ______________ -0.099264 {'articulatory'} 0.11085 {'dopamine' } -0.098204 {'unattended' } 0.10272 {'drug' } -0.096077 {'phonetic' } 0.1013 {'reward' } -0.093439 {'addiction' } 0.099705 {'outcome' } -0.093439 {'attribution' } 0.093531 {'expression'} -0.093439 {'craving' } 0.09095 {'loss' } -0.093439 {'episode' } 0.089919 {'disorder' } -0.093439 {'houses' } 0.0899 {'anxiety' } -0.093439 {'nonwords' } 0.086915 {'affective' } -0.093439 {'oscillations'} 0.084449 {'affect' } Input image 12 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0017/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ______________ __________ _____________ -0.16764 {'auditory' } 0.13913 {'work' } -0.14505 {'sound' } 0.13813 {'memory' } -0.13687 {'speech' } 
0.12618 {'executive'} -0.13429 {'sounds' } 0.12042 {'working' } -0.13226 {'noise' } 0.11343 {'rule' } -0.11595 {'acoustic' } 0.11191 {'control' } -0.1117 {'pitch' } 0.11081 {'correct' } -0.10654 {'listening' } 0.10669 {'conflict' } -0.10531 {'music' } 0.097952 {'2back' } -0.10262 {'production'} 0.095853 {'switch' } Input image 13 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0018/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ __________________ __________ _______________ -0.13891 {'1back' } 0.14642 {'visual' } -0.1377 {'probes' } 0.13036 {'motion' } -0.12982 {'temperature' } 0.10346 {'eye' } -0.12816 {'control' } 0.10182 {'object' } -0.12564 {'counting' } 0.10132 {'space' } -0.11506 {'monitoring' } 0.097769 {'objects' } -0.11301 {'gonogo' } 0.09442 {'animal' } -0.1128 {'nback' } 0.094179 {'images' } -0.11042 {'unpleasantness'} 0.093662 {'orientation'} -0.1102 {'nogo' } 0.090217 {'female' } Input image 14 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0019/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ______________ __________ _________________ -0.042742 {'correct' } 0.049978 {'sexual' } -0.039233 {'monitoring'} 0.044813 {'animals' } -0.036774 {'control' } 0.043709 {'conditioned' } -0.036683 {'conflict' } 0.039641 {'eating' } -0.033513 {'working' } 0.038981 {'autistic' } -0.032333 {'demands' } 0.03722 {'bottomup' } -0.030002 {'work' } 0.035765 {'women' } -0.029935 {'errors' } 0.035127 {'depressive' } -0.029227 {'sentence' } 0.034554 {'conditioning' } -0.02843 {'go' } 0.034248 {'reinforcement'} Input image 15 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0020/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _________________ __________ ______________ -0.06583 {'motor' } 0.08936 {'negative' } 
-0.065276 {'imagery' } 0.087588 {'outcome' } -0.065098 {'movements' } 0.0845 {'neutral' } -0.06282 {'finger' } 0.082015 {'affect' } -0.060702 {'production' } 0.078143 {'trait' } -0.058912 {'overt' } 0.077566 {'reward' } -0.058899 {'hand' } 0.076415 {'affective' } -0.058162 {'muscle' } 0.07286 {'anxiety' } -0.057629 {'somatosensory'} 0.072446 {'positive' } -0.056074 {'auditory' } 0.07159 {'regulation'} Input image 16 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0021/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ______________ __________ _________________ -0.078716 {'disorder' } 0.11098 {'anticipatory' } -0.076731 {'depression'} 0.10797 {'motor' } -0.075917 {'images' } 0.1066 {'sequential' } -0.075347 {'age' } 0.10541 {'articulatory' } -0.066107 {'syntactic' } 0.10468 {'voluntary' } -0.063966 {'faces' } 0.10284 {'reinforcement'} -0.062122 {'pair' } 0.099506 {'incentive' } -0.061169 {'work' } 0.097087 {'velocity' } -0.060429 {'face' } 0.096777 {'limb' } -0.059296 {'approach' } 0.096415 {'preparatory' } Input image 17 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0023/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _____________ __________ ______________ -0.1332 {'visual' } 0.12878 {'affect' } -0.12331 {'object' } 0.12228 {'regulation'} -0.1233 {'objects' } 0.10696 {'reward' } -0.10866 {'spatial' } 0.10631 {'negative' } -0.099005 {'motion' } 0.10551 {'affective' } -0.091878 {'shape' } 0.10229 {'outcome' } -0.091019 {'mirror' } 0.10066 {'emotion' } -0.077045 {'attention'} 0.09929 {'rating' } -0.073474 {'eye' } 0.098775 {'neutral' } -0.073407 {'actions' } 0.09555 {'trait' } Input image 18 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0024/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ 
________________ __________ ______________ -0.10401 {'articulatory'} 0.1046 {'drug' } -0.10002 {'gesture' } 0.09753 {'anxiety' } -0.099231 {'lip' } 0.08515 {'risk' } -0.097767 {'gestures' } 0.084991 {'affective' } -0.09765 {'phonetic' } 0.084071 {'affect' } -0.093119 {'oral' } 0.084013 {'dopamine' } -0.091811 {'unattended' } 0.083244 {'disorder' } -0.090918 {'grammatical' } 0.077503 {'outcome' } -0.089431 {'autistic' } 0.075591 {'trait' } -0.088628 {'visual' } 0.074458 {'depression'} Input image 19 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0028/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ ________________ -0.29619 {'trait' } 0.34703 {'motor' } -0.28084 {'person' } 0.34376 {'movements' } -0.27806 {'personal' } 0.32288 {'execution' } -0.26429 {'age' } 0.31886 {'hand' } -0.2439 {'disorder' } 0.30213 {'finger' } -0.23798 {'social' } 0.28198 {'sensorimotor'} -0.23527 {'positive' } 0.2473 {'preparation' } -0.23523 {'depression' } 0.24614 {'tapping' } -0.22468 {'negative' } 0.2406 {'hands' } -0.22456 {'association'} 0.23059 {'imagery' } Input image 20 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0029/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ _______________ -0.12046 {'sound' } 0.13776 {'rule' } -0.11855 {'noise' } 0.13576 {'executive' } -0.10649 {'auditory' } 0.13363 {'control' } -0.10421 {'frequency' } 0.12666 {'1back' } -0.1015 {'acoustic' } 0.12346 {'rules' } -0.08717 {'sounds' } 0.10758 {'working' } -0.083404 {'integration'} 0.10637 {'switch' } -0.081173 {'motion' } 0.105 {'calculation'} -0.078938 {'stimulation'} 0.10194 {'planning' } -0.07706 {'male' } 0.10063 {'nback' } Input image 21 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0030/con_0024.nii 
_____________________________________________________________________ testr_low words_low testr_high words_high _________ ______________ __________ _______________ -0.094453 {'age' } 0.10748 {'calculation'} -0.08429 {'male' } 0.10096 {'nogo' } -0.082042 {'trait' } 0.10066 {'maintenance'} -0.079367 {'self' } 0.094199 {'1back' } -0.074064 {'depression'} 0.090693 {'counting' } -0.071132 {'images' } 0.086643 {'go' } -0.068163 {'personal' } 0.084531 {'card' } -0.067054 {'judgment' } 0.082795 {'preparatory'} -0.06519 {'animal' } 0.080483 {'solving' } -0.064252 {'time' } 0.078161 {'load' } Input image 22 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0031/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ __________________ -0.13836 {'pair' } 0.15717 {'somatosensory' } -0.1255 {'judgment' } 0.14839 {'articulatory' } -0.11659 {'correct' } 0.14322 {'motor' } -0.10957 {'judgments' } 0.14278 {'heat' } -0.095915 {'intention' } 0.14237 {'noxious' } -0.087793 {'trait' } 0.13949 {'unpleasantness'} -0.087339 {'work' } 0.13838 {'painrelated' } -0.087029 {'decision' } 0.13558 {'tapping' } -0.084902 {'demand' } 0.13526 {'anticipatory' } -0.084583 {'recognition'} 0.13319 {'vocal' } Input image 23 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0032/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ______________ __________ ________________ -0.094079 {'semantic' } 0.15766 {'reward' } -0.082843 {'memory' } 0.1314 {'rewards' } -0.072436 {'sentence' } 0.13017 {'aversive' } -0.07181 {'sentences' } 0.12321 {'ratings' } -0.071214 {'word' } 0.12145 {'money' } -0.0701 {'spatial' } 0.11559 {'anticipation'} -0.069054 {'words' } 0.11361 {'food' } -0.067883 {'pairs' } 0.11133 {'monetary' } -0.067058 {'familiar' } 0.11015 {'rating' } -0.066906 {'perceptual'} 0.10739 {'incentive' } Input image 24 
./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0033/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _________________ __________ _______________ -0.12948 {'age' } 0.14031 {'execution' } -0.1058 {'judgment' } 0.13694 {'nogo' } -0.1049 {'association' } 0.13468 {'motor' } -0.099982 {'images' } 0.1325 {'heat' } -0.098179 {'trait' } 0.12626 {'preparatory'} -0.096949 {'sentences' } 0.12152 {'noxious' } -0.09627 {'sentence' } 0.11814 {'preparation'} -0.095623 {'semantic' } 0.11729 {'counting' } -0.09476 {'comprehension'} 0.11723 {'feedback' } -0.092887 {'recognition' } 0.11552 {'go' } Input image 25 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0036/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ____________________ __________ _______________ -0.096035 {'memory' } 0.065652 {'noise' } -0.086838 {'memories' } 0.058799 {'sound' } -0.085894 {'retrieval' } 0.056974 {'frequency' } -0.084461 {'selfreferential' } 0.053322 {'perception' } -0.080383 {'mentalizing' } 0.050574 {'auditory' } -0.079441 {'recollection' } 0.048099 {'speech' } -0.077093 {'probes' } 0.044709 {'probability'} -0.075874 {'1back' } 0.044235 {'sequence' } -0.074526 {'autobiographical'} 0.043508 {'adaptation' } -0.073598 {'regulation' } 0.042549 {'visual' } Input image 26 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0038/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ________________ __________ ____________ -0.13529 {'phonology' } 0.11428 {'dopamine'} -0.11523 {'phonological'} 0.096165 {'male' } -0.10904 {'orthographic'} 0.091982 {'images' } -0.10767 {'word' } 0.090964 {'anxiety' } -0.10764 {'language' } 0.090877 {'drug' } -0.10255 {'verb' } 0.090552 {'value' } -0.10252 {'words' } 0.086389 {'outcome' } -0.10193 {'readers' } 0.08337 
{'disorder'} -0.10149 {'counting' } 0.080822 {'loss' } -0.098211 {'lexical' } 0.080465 {'allele' } Input image 27 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0040/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _________________ __________ _______________ -0.13606 {'word' } 0.11048 {'body' } -0.13275 {'sentences' } 0.10969 {'motion' } -0.13192 {'semantic' } 0.10106 {'visual' } -0.1311 {'words' } 0.099564 {'movements' } -0.13035 {'sentence' } 0.097106 {'stimulation'} -0.1293 {'knowledge' } 0.095615 {'hand' } -0.11296 {'comprehension'} 0.089981 {'tactile' } -0.11147 {'demand' } 0.08865 {'muscle' } -0.10991 {'memory' } 0.087639 {'eye' } -0.10921 {'judgment' } 0.0861 {'rotation' } Input image 28 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0041/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ _________________ -0.18036 {'images' } 0.22006 {'counting' } -0.16715 {'frequency' } 0.1771 {'1back' } -0.13649 {'locations' } 0.17566 {'painrelated' } -0.12642 {'orientation'} 0.17383 {'preparatory' } -0.11664 {'motion' } 0.17345 {'phonology' } -0.11115 {'age' } 0.17293 {'anticipatory' } -0.10035 {'adaptation' } 0.16958 {'retrieved' } -0.09347 {'eye' } 0.16556 {'incentive' } -0.092407 {'trait' } 0.16502 {'reinforcement'} -0.089827 {'disorder' } 0.16053 {'heat' } Input image 29 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0043/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ _________________ -0.11651 {'semantic' } 0.085546 {'stimulation' } -0.11336 {'words' } 0.083883 {'pain' } -0.11238 {'word' } 0.082127 {'somatosensory'} -0.10674 {'episodic' } 0.079223 {'motor' } -0.10202 {'judgments' } 0.071996 {'feedback' } -0.096259 {'judgment' } 0.067908 {'painful' 
} -0.096005 {'recognition'} 0.065226 {'movements' } -0.094844 {'perceptual' } 0.061314 {'finger' } -0.089438 {'retrieval' } 0.061214 {'tactile' } -0.08667 {'memory' } 0.061127 {'sensorimotor' } Input image 30 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0044/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ______________ __________ _______________ -0.16298 {'visual' } 0.081176 {'acquisition'} -0.12253 {'autistic' } 0.073773 {'affective' } -0.12005 {'rotation' } 0.069925 {'affect' } -0.11971 {'unattended'} 0.066965 {'anxiety' } -0.11842 {'colour' } 0.064064 {'approach' } -0.11682 {'videos' } 0.06286 {'age' } -0.11082 {'attention' } 0.062522 {'trait' } -0.10891 {'object' } 0.061003 {'outcome' } -0.10285 {'shapes' } 0.060286 {'sound' } -0.10259 {'objects' } 0.059751 {'drug' } Input image 31 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0046/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ______________ __________ ______________ -0.15143 {'movements' } 0.21874 {'reward' } -0.14596 {'frequency' } 0.19874 {'negative' } -0.14276 {'hand' } 0.17644 {'valence' } -0.14015 {'position' } 0.17627 {'positive' } -0.13777 {'spatial' } 0.17349 {'regulation'} -0.13432 {'visual' } 0.17216 {'rewards' } -0.13059 {'noise' } 0.16306 {'emotion' } -0.13027 {'motor' } 0.1601 {'neutral' } -0.12464 {'finger' } 0.15714 {'affect' } -0.12338 {'adaptation'} 0.15525 {'rating' } Input image 32 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0047/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ _________________ -0.27796 {'recognition'} 0.37015 {'somatosensory'} -0.24942 {'episodic' } 0.35984 {'motor' } -0.24489 {'memory' } 0.33368 {'pain' } -0.2383 {'object' } 0.31413 {'stimulation' } -0.23456 {'person' } 
0.30777 {'painful' } -0.23255 {'objects' } 0.29561 {'muscle' } -0.23248 {'scene' } 0.29302 {'production' } -0.22341 {'retrieval' } 0.28431 {'sensory' } -0.22268 {'judgments' } 0.27934 {'sensorimotor' } -0.21803 {'visual' } 0.27539 {'noxious' } Input image 33 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0050/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ________________ __________ ______________ -0.13595 {'movements' } 0.14362 {'affect' } -0.13588 {'execution' } 0.13167 {'affective' } -0.13516 {'motor' } 0.12906 {'emotion' } -0.12854 {'planning' } 0.12358 {'disorder' } -0.11644 {'preparation' } 0.12061 {'age' } -0.11584 {'hand' } 0.11763 {'neutral' } -0.11485 {'sensorimotor'} 0.11723 {'expression'} -0.10893 {'finger' } 0.11679 {'trait' } -0.10667 {'imagery' } 0.1158 {'anxiety' } -0.10372 {'preparatory' } 0.11504 {'faces' } Input image 34 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0051/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ______________ __________ ________________ -0.1743 {'frequency' } 0.21387 {'grammatical' } -0.12362 {'images' } 0.20503 {'retrieved' } -0.1188 {'time' } 0.20194 {'sexual' } -0.118 {'age' } 0.20034 {'autistic' } -0.098838 {'risk' } 0.19811 {'counting' } -0.095216 {'noise' } 0.19482 {'phonology' } -0.09484 {'adaptation'} 0.19084 {'preparatory' } -0.093923 {'trait' } 0.18666 {'anticipatory'} -0.086922 {'space' } 0.18582 {'1back' } -0.086046 {'disorder' } 0.17918 {'engagement' } Input image 35 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0052/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ __________________ -0.078905 {'age' } 0.098313 {'colour' } -0.06879 {'syntactic' } 0.091844 {'videos' } -0.064608 {'sentence' } 0.081996 {'movie' } 
-0.06365 {'disorder' } 0.081634 {'anticipatory' } -0.063196 {'sentences' } 0.081326 {'psychophysical'} -0.057587 {'depression' } 0.081062 {'preparatory' } -0.057535 {'work' } 0.077734 {'unattended' } -0.055958 {'wm' } 0.072941 {'reinforcement' } -0.05486 {'association'} 0.072482 {'habituation' } -0.050833 {'approach' } 0.072328 {'empathic' } Input image 36 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0053/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _________________ __________ ________________ -0.10962 {'sentences' } 0.14666 {'execution' } -0.10377 {'comprehension'} 0.13915 {'movements' } -0.097893 {'sentence' } 0.1304 {'hand' } -0.09397 {'noun' } 0.12683 {'finger' } -0.090948 {'association' } 0.1263 {'motor' } -0.09049 {'probability' } 0.12045 {'sensorimotor'} -0.088119 {'word' } 0.11878 {'sequential' } -0.087618 {'language' } 0.11376 {'hands' } -0.087308 {'syntactic' } 0.10688 {'action' } -0.085011 {'words' } 0.10298 {'fingers' } Input image 37 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0055/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ _______________ -0.11188 {'videos' } 0.095288 {'error' } -0.11002 {'photographs'} 0.093464 {'inhibition' } -0.10666 {'visual' } 0.089447 {'muscle' } -0.10577 {'mentalizing'} 0.082759 {'errors' } -0.10463 {'social' } 0.079542 {'stimulation'} -0.1038 {'scene' } 0.077127 {'working' } -0.10202 {'objects' } 0.076708 {'go' } -0.10164 {'tom' } 0.076414 {'nogo' } -0.098926 {'object' } 0.073944 {'work' } -0.09819 {'motion' } 0.073537 {'motor' } Input image 38 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0056/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ _______________ -0.068784 {'noxious' } 0.072834 
{'objects' } -0.066668 {'heat' } 0.062671 {'object' } -0.065055 {'sensation' } 0.061647 {'faces' } -0.064416 {'monitoring' } 0.060114 {'face' } -0.060787 {'pain' } 0.057546 {'fearful' } -0.059764 {'painrelated'} 0.054691 {'expressions'} -0.056476 {'load' } 0.05289 {'facial' } -0.054831 {'temperature'} 0.048751 {'expression' } -0.051734 {'errors' } 0.047586 {'familiar' } -0.050674 {'rule' } 0.04633 {'sentence' } Input image 39 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0058/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ _______________ -0.13807 {'heat' } 0.096588 {'eye' } -0.12542 {'painful' } 0.093707 {'orientation'} -0.12123 {'noxious' } 0.090131 {'trait' } -0.12017 {'counting' } 0.089169 {'shape' } -0.11786 {'pain' } 0.08596 {'motion' } -0.11077 {'painrelated'} 0.080231 {'time' } -0.10915 {'1back' } 0.078488 {'visual' } -0.10854 {'temperature'} 0.075905 {'loss' } -0.10108 {'tapping' } 0.073519 {'risk' } -0.097994 {'nogo' } 0.07272 {'images' } Input image 40 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0059/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ________________ __________ ____________________ -0.24636 {'movements' } 0.18839 {'personal' } -0.22383 {'execution' } 0.18265 {'depression' } -0.21919 {'motor' } 0.17779 {'positive' } -0.20196 {'hand' } 0.1776 {'self' } -0.18888 {'action' } 0.17738 {'negative' } -0.17921 {'sensory' } 0.1767 {'person' } -0.17915 {'sensorimotor'} 0.17546 {'retrieval' } -0.17649 {'actions' } 0.17017 {'memories' } -0.17569 {'finger' } 0.17017 {'autobiographical'} -0.17086 {'position' } 0.16613 {'age' } Input image 41 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0061/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _________________ 
__________ _________________ -0.18265 {'trait' } 0.22282 {'motor' } -0.15152 {'depression' } 0.1893 {'sensorimotor' } -0.14535 {'disorder' } 0.18862 {'movements' } -0.13641 {'risk' } 0.18543 {'somatosensory'} -0.13428 {'person' } 0.18541 {'sensory' } -0.13096 {'social' } 0.1782 {'tapping' } -0.1307 {'personal' } 0.17607 {'execution' } -0.12617 {'schizophrenia'} 0.17287 {'finger' } -0.12419 {'outcome' } 0.17018 {'articulatory' } -0.12376 {'age' } 0.16871 {'production' } Input image 42 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0062/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ __________________ __________ ____________________ -0.069478 {'color' } 0.11094 {'person' } -0.068766 {'working' } 0.094881 {'default' } -0.064826 {'maintenance' } 0.089475 {'self' } -0.06446 {'arithmetic' } 0.084076 {'personal' } -0.064275 {'control' } 0.077773 {'autobiographical'} -0.062239 {'numbers' } 0.076391 {'social' } -0.062016 {'discrimination'} 0.075942 {'selfreferential' } -0.0614 {'rule' } 0.075086 {'stories' } -0.06055 {'error' } 0.070241 {'trait' } -0.056878 {'work' } 0.068382 {'mentalizing' } Input image 43 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0064/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _________________ __________ _______________ -0.11003 {'sensory' } 0.080746 {'semantic' } -0.10829 {'somatosensory'} 0.077146 {'words' } -0.10051 {'heat' } 0.073183 {'word' } -0.098385 {'noxious' } 0.071717 {'animal' } -0.094485 {'painful' } 0.067736 {'images' } -0.088369 {'motor' } 0.061571 {'objects' } -0.087678 {'nociceptive' } 0.060516 {'recognition'} -0.086955 {'temperature' } 0.058951 {'association'} -0.086207 {'pain' } 0.056794 {'object' } -0.085105 {'voluntary' } 0.0552 {'english' } Input image 44 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0065/con_0024.nii 
_____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ _________________ -0.10388 {'pair' } 0.13935 {'motor' } -0.093705 {'judgment' } 0.12882 {'somatosensory'} -0.09229 {'attentional'} 0.12777 {'tapping' } -0.081134 {'attention' } 0.12433 {'muscle' } -0.080588 {'correct' } 0.12006 {'limb' } -0.078592 {'object' } 0.12001 {'sensorimotor' } -0.078245 {'color' } 0.11875 {'articulatory' } -0.074459 {'pairs' } 0.11582 {'finger' } -0.074034 {'objects' } 0.11058 {'fingers' } -0.072564 {'decision' } 0.1095 {'lip' } Input image 45 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0066/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ ________________ -0.13899 {'sound' } 0.12895 {'control' } -0.13772 {'auditory' } 0.11861 {'working' } -0.12678 {'sounds' } 0.11229 {'work' } -0.11692 {'noise' } 0.11066 {'executive' } -0.11565 {'acoustic' } 0.10922 {'2back' } -0.1047 {'integration'} 0.10715 {'interference'} -0.1042 {'sentence' } 0.10507 {'conflict' } -0.10013 {'sentences' } 0.091759 {'load' } -0.099972 {'audiovisual'} 0.089286 {'wm' } -0.098512 {'speech' } 0.088978 {'monitoring' } Input image 46 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0068/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ______________ __________ ________________ -0.24742 {'trait' } 0.29301 {'movements' } -0.23709 {'negative' } 0.27994 {'execution' } -0.23222 {'positive' } 0.27137 {'motor' } -0.22348 {'affect' } 0.26182 {'visual' } -0.22222 {'emotion' } 0.2538 {'hand' } -0.22011 {'outcome' } 0.24763 {'finger' } -0.21963 {'depression'} 0.24436 {'sensorimotor'} -0.21809 {'anxiety' } 0.22794 {'imagery' } -0.21619 {'person' } 0.21747 {'overt' } -0.21585 {'personal' } 0.20487 {'hands' } Input image 47 
./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0069/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _______________ __________ ____________________ -0.12701 {'visual' } 0.1139 {'memories' } -0.097098 {'motion' } 0.10327 {'memory' } -0.09324 {'eye' } 0.10035 {'retrieved' } -0.075669 {'target' } 0.096568 {'episodic' } -0.072725 {'color' } 0.093657 {'retrieval' } -0.072602 {'probability'} 0.092027 {'remember' } -0.067074 {'shape' } 0.090662 {'noxious' } -0.066952 {'position' } 0.089405 {'spontaneous' } -0.063091 {'saccade' } 0.089369 {'autobiographical'} -0.061496 {'spatial' } 0.089233 {'1back' } Input image 48 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0070/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ __________________ __________ ____________ -0.09586 {'control' } 0.081399 {'motion' } -0.093252 {'counting' } 0.077774 {'visual' } -0.086912 {'temperature' } 0.076652 {'spatial' } -0.0855 {'1back' } 0.069115 {'noise' } -0.084173 {'production' } 0.06754 {'space' } -0.083618 {'unpleasantness'} 0.065525 {'eye' } -0.081443 {'articulatory' } 0.061195 {'images' } -0.080689 {'working' } 0.059091 {'personal'} -0.080438 {'executive' } 0.058436 {'scene' } -0.080036 {'heat' } 0.056397 {'shape' } Input image 49 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0073/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ______________ __________ _________________ -0.1774 {'trait' } 0.18387 {'motor' } -0.16521 {'personal' } 0.17243 {'execution' } -0.15853 {'person' } 0.16336 {'movements' } -0.14648 {'depression'} 0.15681 {'sensory' } -0.14269 {'disorder' } 0.15374 {'counting' } -0.13873 {'age' } 0.14576 {'sensorimotor' } -0.13792 {'social' } 0.14173 {'overt' } -0.13749 {'self' } 0.13879 {'tapping' } -0.12882 
{'negative' } 0.1384 {'somatosensory'} -0.1258 {'outcome' } 0.13665 {'finger' } Input image 50 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0074/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _________________ __________ ____________________ -0.13397 {'sensory' } 0.15089 {'person' } -0.12064 {'motor' } 0.137 {'self' } -0.11284 {'stimulation' } 0.12291 {'personal' } -0.10103 {'somatosensory'} 0.11905 {'memories' } -0.099248 {'movements' } 0.11774 {'autobiographical'} -0.096852 {'finger' } 0.11511 {'emotion' } -0.096019 {'muscle' } 0.11191 {'social' } -0.091997 {'hand' } 0.11091 {'future' } -0.090801 {'sensorimotor' } 0.11023 {'valence' } -0.088413 {'execution' } 0.10867 {'judgments' } Input image 51 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0078/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _____________ __________ _______________ -0.063138 {'trait' } 0.12515 {'visual' } -0.059521 {'outcome' } 0.098522 {'colour' } -0.058739 {'outcomes' } 0.095129 {'animals' } -0.054237 {'reward' } 0.094519 {'unattended' } -0.052162 {'positive' } 0.08643 {'congruency' } -0.050689 {'choice' } 0.08327 {'audiovisual'} -0.05049 {'sentence' } 0.082528 {'crossmodal' } -0.047789 {'decision' } 0.08249 {'perceptual' } -0.046821 {'sentences'} 0.08098 {'habituation'} -0.046399 {'syntactic'} 0.07864 {'sexual' } Input image 52 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0079/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ______________ __________ ________________ -0.092423 {'visual' } 0.12161 {'reward' } -0.081606 {'object' } 0.11428 {'anticipation'} -0.080473 {'objects' } 0.11082 {'rewards' } -0.076698 {'adaptation'} 0.10753 {'money' } -0.071745 {'motion' } 0.090827 {'monetary' } -0.068727 {'space' } 0.090776 
{'choice' } -0.066247 {'training' } 0.089904 {'incentive' } -0.064564 {'locations' } 0.08911 {'choices' } -0.062 {'spatial' } 0.088763 {'outcome' } -0.061747 {'repetition'} 0.080164 {'regulation' } Input image 53 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0080/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ _________________ __________ _______________ -0.14641 {'motor' } 0.13301 {'visual' } -0.12399 {'pain' } 0.11352 {'object' } -0.1211 {'heat' } 0.10836 {'motion' } -0.12084 {'feedback' } 0.10567 {'objects' } -0.11912 {'noxious' } 0.10271 {'spatial' } -0.11718 {'painful' } 0.097137 {'priming' } -0.11246 {'somatosensory'} 0.096209 {'eye' } -0.10941 {'tapping' } 0.092184 {'recognition'} -0.10615 {'finger' } 0.090969 {'development'} -0.1046 {'muscle' } 0.086982 {'judgments' } Input image 54 ./fmri/spm/univariate/model-02_CcEScA/1stLevel/sub-0081/con_0024.nii _____________________________________________________________________ testr_low words_low testr_high words_high _________ ________________ __________ __________________ -0.14996 {'visual' } 0.10785 {'card' } -0.14797 {'movements' } 0.1073 {'regulation' } -0.14619 {'hand' } 0.10631 {'correct' } -0.11626 {'motion' } 0.10554 {'reward' } -0.11595 {'objects' } 0.1021 {'choice' } -0.11485 {'object' } 0.099888 {'decisionmaking'} -0.11305 {'finger' } 0.096574 {'sentences' } -0.10315 {'sensorimotor'} 0.094735 {'outcome' } -0.099258 {'eye' } 0.093978 {'knowledge' } -0.095549 {'motor' } 0.093336 {'rewards' }
% Assemble inputs for the wedge plot from the first entry of
% top_feature_tables (a table with columns testr_low/testr_high =
% correlation values and words_low/words_high = the matching terms).
r_low = top_feature_tables{1}.testr_low;
r_high = top_feature_tables{1}.testr_high;
highwords = top_feature_tables{1}.words_high;
lowwords = top_feature_tables{1}.words_low;
% Stack correlations in blocked order: all high values first, then all low.
r_to_plot = [r_high; r_low];
% [highwords lowwords]' is a 2x10 cell (row 1 = high terms, row 2 = low
% terms; see the echoed output below).
% NOTE(review): MATLAB's column-major linear order of this 2x10 cell
% interleaves high/low terms (high1, low1, high2, ...), whereas r_to_plot
% is blocked [high; low] -- confirm tor_wedge_plot pairs labels with radii
% as intended, or use [highwords; lowwords] for matching blocked order.
textlabels = [ highwords lowwords]';
create_figure('wedge_plot');
% Echo the labels as a quick sanity check (output captured below).
textlabels
textlabels = 2×10 cell
'visual' 'object' 'shape' 'eye' 'objects' 'orientation' 'motion' 'hand' 'spatial' 'attention'
'music' 'musical' 'pitch' 'autobiographical' 'rating' 'auditory' 'acoustic' 'placebo' 'sounds' 'ratings'
hh = tor_wedge_plot(r_to_plot, textlabels, 'outer_circle_radius', .3, 'colors', {[1 .7 0] [.4 0 .8]}, 'bicolor', 'nofigure');